In [1]:
import glob
import math
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import random
import sklearn.metrics as metrics

from tensorflow.keras import optimizers
from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import add, concatenate, Conv2D, Dense, Dropout, Flatten, Input
from tensorflow.keras.layers import Activation, AveragePooling2D, BatchNormalization, MaxPooling2D
from tensorflow.keras.layers import ZeroPadding2D
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical


%matplotlib inline
In [2]:
                            # Set up 'ggplot' style
plt.style.use('ggplot')     # if want to use the default style, set 'classic'
plt.rcParams['ytick.right']     = True
plt.rcParams['ytick.labelright']= True
plt.rcParams['ytick.left']      = False
plt.rcParams['ytick.labelleft'] = False
plt.rcParams['font.family']     = 'Arial'
In [3]:
# where am i?
%pwd
Out[3]:
'C:\\Users\\david\\Documents\\ImageNet'
In [4]:
%ls
 Volume in drive C is Acer
 Volume Serial Number is F2E5-64E8

 Directory of C:\Users\david\Documents\ImageNet

09/15/2019  01:19 AM    <DIR>          .
09/15/2019  01:19 AM    <DIR>          ..
09/09/2019  01:02 AM                43 .gitattributes
08/22/2019  11:06 PM                26 .gitignore
09/14/2019  05:05 PM    <DIR>          .ipynb_checkpoints
09/14/2019  04:34 PM         1,216,519 Create_Train_Test_Set.ipynb
09/14/2019  03:53 PM    <DIR>          data
08/22/2019  11:09 PM           455,126 Download-ImageNet.html
09/09/2019  12:35 AM           288,923 Download-ImageNet.ipynb
09/03/2019  09:40 PM           367,769 Download-Pexels.html
09/09/2019  12:35 AM            94,549 Download-Pexels.ipynb
09/09/2019  01:02 AM        10,518,772 fgs-imgs.npz
09/08/2019  11:18 PM        41,976,052 fgs-imgs128.npz
09/08/2019  11:18 PM        23,611,636 fgs-imgs96.npz
09/14/2019  03:57 PM        49,130,740 fgsOpnImg-imgs96.npz
09/15/2019  01:14 AM                 0 FlowerPower.csv
09/15/2019  01:12 AM         1,646,480 FlowerPower.hdf5
09/14/2019  03:06 PM       226,409,716 flr102-imgs96.npz
09/09/2019  01:02 AM        15,728,884 flr-imgs.npz
09/08/2019  11:18 PM        62,374,132 flr-imgs128.npz
09/08/2019  11:18 PM        35,085,556 flr-imgs96.npz
09/09/2019  01:02 AM        13,295,860 flrnonflr-test-imgs.npz
09/08/2019  11:18 PM        52,445,428 flrnonflr-test-imgs128.npz
09/08/2019  11:18 PM        29,500,660 flrnonflr-test-imgs96-0.8.npz
09/14/2019  04:13 PM       102,187,252 flrnonflr-test-imgs96-0.8+.npz
09/08/2019  11:18 PM        14,764,276 flrnonflr-test-imgs96-0.9.npz
09/09/2019  01:02 AM             8,900 flrnonflr-test-labels.npz
09/08/2019  11:18 PM             8,780 flrnonflr-test-labels128.npz
09/08/2019  11:18 PM             8,780 flrnonflr-test-labels96-0.8.npz
09/14/2019  07:39 PM            29,812 flrnonflr-test-labels96-0.8+.npz
09/08/2019  11:18 PM             4,516 flrnonflr-test-labels96-0.9.npz
09/09/2019  01:02 AM        53,133,556 flrnonflr-train-imgs.npz
09/08/2019  11:18 PM       209,584,372 flrnonflr-train-imgs128.npz
09/08/2019  11:18 PM       117,891,316 flrnonflr-train-imgs96-0.8.npz
09/14/2019  04:13 PM       408,748,276 flrnonflr-train-imgs96-0.8+.npz
09/08/2019  11:18 PM       132,627,700 flrnonflr-train-imgs96-0.9.npz
09/09/2019  01:02 AM            34,836 flrnonflr-train-labels.npz
09/08/2019  11:18 PM            34,356 flrnonflr-train-labels128.npz
09/08/2019  11:18 PM            34,356 flrnonflr-train-labels96-0.8.npz
09/14/2019  04:13 PM           118,516 flrnonflr-train-labels96-0.8+.npz
09/08/2019  11:18 PM            38,620 flrnonflr-train-labels96-0.9.npz
08/17/2019  11:53 AM           124,162 ImageNet-Flowers.txt
08/17/2019  03:54 PM            75,692 ImageNet-Fungus.txt
08/17/2019  03:57 PM            81,424 ImageNet-Rocks.txt
09/15/2019  01:08 AM            58,263 Inception-v4.ipynb
09/14/2019  11:39 PM            26,103 model.pdf
09/14/2019  07:39 PM    <DIR>          npz
09/03/2019  09:40 PM           128,688 Pexels-Flowers.txt
09/03/2019  09:40 PM            28,575 Pexels-Umbrellas.txt
09/09/2019  01:02 AM        22,733,044 pxl_flr-imgs.npz
09/08/2019  11:18 PM        88,080,628 pxl_flr-imgs128.npz
09/08/2019  11:18 PM        49,545,460 pxl_flr-imgs96.npz
09/09/2019  01:02 AM         5,173,492 pxl_umb-imgs.npz
09/08/2019  11:18 PM        20,594,932 pxl_umb-imgs128.npz
09/08/2019  11:18 PM        11,584,756 pxl_umb-imgs96.npz
09/09/2019  01:02 AM        12,275,956 rck-imgs.npz
09/08/2019  11:18 PM        49,004,788 rck-imgs128.npz
09/08/2019  11:18 PM        27,565,300 rck-imgs96.npz
09/14/2019  04:01 PM    <DIR>          readings
08/22/2019  11:02 PM                44 README.md
09/14/2019  04:21 PM           417,457 Reshape_Resize_Images.ipynb
09/09/2019  12:48 AM         8,546,104 train_Neural_Network (Conv2D, 96-0.8).html
09/14/2019  08:36 PM         7,071,416 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp + Added data, try9).html
09/11/2019  01:01 AM         4,494,650 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp, try6).html
09/11/2019  10:59 PM         6,116,768 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp, try7).html
09/12/2019  02:35 AM         5,851,809 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp, try8).html
09/09/2019  03:08 AM         3,900,219 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer, try3).html
09/09/2019  11:09 PM         6,528,529 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer, try4).html
09/10/2019  08:44 PM         6,636,754 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer, try5).html
09/09/2019  01:32 AM         6,583,279 train_Neural_Network (ResNetV1, 96-0.8, Dropout, try1).html
09/09/2019  02:40 AM         6,300,696 train_Neural_Network (ResNetV1, 96-0.8, Dropout, try2).html
09/09/2019  01:23 AM         6,446,135 train_Neural_Network (ResNetV1, 96-0.8, no Dropout, try1).html
09/15/2019  01:19 AM         1,707,332 train_Neural_Network.ipynb
09/14/2019  04:08 PM        88,003,828 umbOpnImg-imgs96.npz
09/14/2019  07:39 PM         2,094,090 VGG_Model_Setup.ipynb
09/14/2019  07:39 PM            17,772 VGG_Model_Train_Test.ipynb
              70 File(s)  2,051,203,236 bytes
               6 Dir(s)  84,308,385,792 bytes free
In [5]:
# Collect the image paths for each category.
# ImageNet-derived sets use .jpg; Pexels sets use .jpeg.
flowers = glob.glob('./data/flr_*.jpg')
fungus = glob.glob('./data/fgs_*.jpg')
rocks = glob.glob('./data/rck_*.jpg')

pixel_flowers = glob.glob('./data/pxl_flower_*.jpeg')
pixel_umbrella = glob.glob('./data/pxl_umbrella_*.jpeg')

# Label every count explicitly — the previous message left the first
# count unlabeled ("There are 1269, 1792 flower, ..."), which was ambiguous.
print("There are %s ImageNet flower, %s Pexels flower, %s fungus, %s rock and %s umbrella pictures"
      %(len(flowers), len(pixel_flowers), len(fungus), len(rocks), len(pixel_umbrella)))
There are 1269, 1792 flower, 856 fungus, 1007 rock and 420 umbrella pictures
In [6]:
# Randomly show 5 example images (the old comment said 10, but the
# loop displays 5).
from IPython.display import Image   # only needed for the optional inline-display line below

dataset = flowers   # switch to fungus / rocks to inspect those sets

for i in range(5):
    index = random.randint(0, len(dataset)-1)
    print("Showing:", dataset[index])

    img = mpimg.imread(dataset[index])
    plt.imshow(img)   # return value was previously bound to an unused name
    plt.show()

#Image(dataset[index])
Showing: ./data\flr_00139.jpg
Showing: ./data\flr_01284.jpg
Showing: ./data\flr_00147.jpg
Showing: ./data\flr_01862.jpg
Showing: ./data\flr_01308.jpg

Extract the training and testing datasets

In [7]:
# Load the pre-built flower / non-flower arrays.
# All four files share the same "96x96, 0.8+ split" suffix.
_npz_suffix = '96-0.8+.npz'
trDatOrg = np.load('flrnonflr-train-imgs' + _npz_suffix)['arr_0']
trLblOrg = np.load('flrnonflr-train-labels' + _npz_suffix)['arr_0']
tsDatOrg = np.load('flrnonflr-test-imgs' + _npz_suffix)['arr_0']
tsLblOrg = np.load('flrnonflr-test-labels' + _npz_suffix)['arr_0']
In [8]:
# Report the array shapes of the train/test images and labels.
print("For the training and test datasets:")
print("The shapes are %s, %s, %s, %s"
      % (trDatOrg.shape, trLblOrg.shape, tsDatOrg.shape, tsLblOrg.shape))
For the training and test datasets:
The shapes are (14784, 96, 96, 3), (14784,), (3696, 96, 96, 3), (3696,)
In [9]:
# Randomly show 20 test images with their labels (the old comment said
# 10, but the loop displays 20).

data = tsDatOrg    # switch to trDatOrg / trLblOrg to inspect the training set
label = tsLblOrg

for i in range(20):
    index = random.randint(0, len(data)-1)
    print("Showing %s index image, It is %s" %(index, label[index]))
    plt.imshow(data[index])   # return value was previously bound to an unused name
    plt.show()
Showing 13 index image, It is 1.0
Showing 1110 index image, It is 1.0
Showing 1544 index image, It is 1.0
Showing 1480 index image, It is 1.0
Showing 843 index image, It is 1.0
Showing 2626 index image, It is 0.0
Showing 3276 index image, It is 0.0
Showing 3672 index image, It is 0.0
Showing 2213 index image, It is 1.0
Showing 526 index image, It is 1.0
Showing 2219 index image, It is 1.0
Showing 2019 index image, It is 1.0
Showing 1636 index image, It is 1.0
Showing 898 index image, It is 1.0
Showing 3218 index image, It is 0.0
Showing 3085 index image, It is 0.0
Showing 2817 index image, It is 0.0
Showing 3306 index image, It is 0.0
Showing 1514 index image, It is 1.0
Showing 1200 index image, It is 1.0
In [10]:
# Convert the image data to 'float32' and rescale values from 0~255 to 0~1
trDat       = trDatOrg.astype('float32')/255
tsDat       = tsDatOrg.astype('float32')/255

# Retrieve the row/column/channel sizes of each image.
# Taking the channel count from the data (instead of hard-coding 3)
# keeps this cell correct even if differently-shaped data is loaded.
imgrows     = trDat.shape[1]
imgclms     = trDat.shape[2]
channel     = trDat.shape[3]

# NOTE: the arrays are already [samples][rows][cols][channel] as Keras
# expects, so no reshape is needed (a commented-out reshape previously
# lived here).

# Perform one-hot encoding on the labels.
# The number of classes is the width of the encoded label matrix.
trLbl       = to_categorical(trLblOrg)
tsLbl       = to_categorical(tsLblOrg)
num_classes = tsLbl.shape[1]
In [11]:
# Fix random seeds for reproducibility: numpy drives Keras weight
# initialization / shuffling, and the stdlib `random` module drives the
# image-sampling cells above (it was previously never seeded).
seed = 29
np.random.seed(seed)
random.seed(seed)
# NOTE(review): TensorFlow's own op-level seed is not set here, so GPU
# training may still be non-deterministic — confirm if exact
# repeatability is required.

modelname = 'FlowerPower'

# RMSprop was chosen over Adam for this run (see the try6-try9 result files).
# `learning_rate` replaces the deprecated `lr` argument in tf.keras.
#optmz = optimizers.Adam(learning_rate=0.001)
optmz = optimizers.RMSprop(learning_rate=0.001)
In [12]:
# Baseline Model -> func: createBaselineModel()

def createBaselineModel():
    """Build and compile a small two-conv CNN baseline classifier.

    Uses the notebook globals imgrows/imgclms/channel for the input
    shape and num_classes for the softmax width.  Compiled with Adam
    and categorical cross-entropy.

    Returns:
        A compiled keras Model.
    """
    img_in = Input(shape=(imgrows, imgclms, channel))

    net = Conv2D(30, (4, 4), activation='relu')(img_in)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Conv2D(50, (4, 4), activation='relu')(net)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Dropout(0.3)(net)

    net = Flatten()(net)
    net = Dense(32, activation='relu')(net)
    net = Dense(num_classes, activation='softmax')(net)

    baseline = Model(inputs=[img_in], outputs=net)
    baseline.compile(loss='categorical_crossentropy',
                     optimizer='adam',
                     metrics=['accuracy'])
    return baseline
In [13]:
# ResNetV1 -> func: createResNetV1()
def resLyr(inputs,
           numFilters=16,
           kernelSz=3,
           strides=1,
           activation='relu',
           batchNorm=True,
           convFirst=True,
           lyrName=None):
    """One ResNet sub-layer: Conv2D with optional BatchNorm and activation.

    convFirst=True applies conv -> BN -> activation; convFirst=False
    applies BN -> activation -> conv (pre-activation ordering).  When
    lyrName is given, each Keras layer gets a derived unique name.

    Returns:
        The output tensor of the sub-layer.
    """
    def named(suffix):
        # Derive a layer name from lyrName, or None to let Keras auto-name.
        return lyrName + suffix if lyrName else None

    convLyr = Conv2D(numFilters, kernel_size=kernelSz, strides=strides,
                     padding='same', kernel_initializer='he_normal',
                     kernel_regularizer=l2(1e-4),
                     name=named('_conv'))

    x = inputs
    if convFirst:
        x = convLyr(x)
    if batchNorm:
        x = BatchNormalization(name=named('_bn'))(x)
    if activation is not None:
        x = Activation(activation, name=named('_' + activation))(x)
    if not convFirst:
        x = convLyr(x)
    return x


def resBlkV1(inputs,
             numFilters=16,
             numBlocks=3,
             downsampleOnFirst=True,
             names=None):
    """Stack numBlocks two-layer residual units (ResNet v1 style).

    When downsampleOnFirst is True, the first unit halves the spatial
    size (stride 2) and projects the shortcut through a linear 1x1 conv
    so the add() shapes match.

    Returns:
        The output tensor of the final residual unit.
    """
    x = inputs
    for blk in range(numBlocks):
        tag = str(blk + 1)
        first_and_down = downsampleOnFirst and blk == 0
        stride = 2 if first_and_down else 1

        def named(suffix):
            # Per-unit layer name, or None when auto-naming.
            return names + '_Blk' + tag + suffix if names else None

        y = resLyr(inputs=x, numFilters=numFilters, strides=stride,
                   lyrName=named('_Res1'))
        y = resLyr(inputs=y, numFilters=numFilters, activation=None,
                   lyrName=named('_Res2'))
        if first_and_down:
            # Linear 1x1 projection so the shortcut matches y's shape.
            x = resLyr(inputs=x, numFilters=numFilters, kernelSz=1,
                       strides=stride, activation=None, batchNorm=False,
                       lyrName=named('_lin'))
        x = add([x, y], name=named('_add'))
        x = Activation('relu', name=named('_relu'))(x)
    return x

def createResNetV1(inputShape=(imgrows, imgclms, channel),
                   numClasses=2):
    """Build and compile the 6-stage ResNet v1 classifier used here.

    Each stage is three residual units followed by Dropout; stages 2,
    3, 4 and 6 downsample on their first unit.  Ends with 6x6 average
    pooling and a softmax head, compiled with the notebook-global
    `optmz` optimizer.

    Returns:
        A compiled keras Model.
    """
    inputs = Input(shape=inputShape)
    v = resLyr(inputs, lyrName='Inpt')

    # (filters, downsample on first unit, stage name, dropout rate)
    stages = [(16,  False, 'Stg1', 0.30),
              (32,  True,  'Stg2', 0.40),
              (64,  True,  'Stg3', 0.50),
              (128, True,  'Stg4', 0.50),
              (128, False, 'Stg5', 0.50),
              (256, True,  'Stg6', 0.50)]
    for filters, downsample, stage, rate in stages:
        v = resBlkV1(inputs=v, numFilters=filters, numBlocks=3,
                     downsampleOnFirst=downsample, names=stage)
        v = Dropout(rate)(v)

    v = AveragePooling2D(pool_size=6, name='AvgPool')(v)
    v = Flatten()(v)
    outputs = Dense(numClasses, activation='softmax',
                    kernel_initializer='he_normal')(v)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optmz,
                  metrics=['accuracy'])
    return model
In [23]:
# Original # Inception-v4 -> func: create_inception_v4()

def stem_block(inputs,
         names=None):
    """Inception-v4 stem: reduces the input image to the feature grid fed
    to the Inception-A blocks via three conv/pool split-and-merge stages.

    NOTE(review): this function is redefined in a later cell with smaller
    filter counts; that later definition shadows this one on a
    top-to-bottom run, so this version is never used.
    NOTE(review): `names` is accepted but unused.
    """
    x = inputs  # NOTE(review): redundant — overwritten immediately below

    # Stage 1: three plain convs, then parallel max-pool / strided conv.
    x = Conv2D(filters=32, kernel_size=(3, 3), strides=2, padding='valid')(inputs)
    x = Conv2D(filters=32, kernel_size=(3, 3), strides=1, padding='valid')(x)
    x = Conv2D(filters=64, kernel_size=(3, 3), strides=1, padding='same')(x)
    x_L1_1 = MaxPooling2D(pool_size=(3, 3), strides=2, padding='valid')(x)
    x_R1_1 = Conv2D(filters=96, kernel_size=(3, 3), strides=2, padding='valid')(x)
    x = concatenate([x_L1_1, x_R1_1])

    # Stage 2: parallel 1x1->3x3 and 1x1->7x1->1x7->3x3 branches.
    x_L2_1 = Conv2D(filters=64, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_L2_2 = Conv2D(filters=96, kernel_size=(3, 3), strides=1, padding='valid')(x_L2_1)
    x_R2_1 = Conv2D(filters=64, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_R2_2 = Conv2D(filters=64, kernel_size=(7, 1), strides=1, padding='same')(x_R2_1)
    x_R2_3 = Conv2D(filters=64, kernel_size=(1, 7), strides=1, padding='same')(x_R2_2)
    x_R2_4 = Conv2D(filters=96, kernel_size=(3, 3), strides=1, padding='valid')(x_R2_3)
    x = concatenate([x_L2_2, x_R2_4])

    # Stage 3: strided conv (zero-padded to match the pool branch) and
    # max-pool, merged.
    x_L3_1 = Conv2D(filters=192, kernel_size=(3, 3), strides=2, padding='valid')(x)
    x_L3_2 = ZeroPadding2D(padding=((0,1), (0,1)))(x_L3_1) # Added due to size mismatch
    x_R3_1 = MaxPooling2D(strides=2, padding='valid')(x)
    x = concatenate([x_L3_2, x_R3_1])
    return x

def inception_a_block(inputs,
         names=None):
    """Inception-A block: four parallel branches (avg-pool+1x1, 1x1,
    1x1->3x3, 1x1->3x3->3x3) concatenated channel-wise; spatial size
    is unchanged.

    NOTE(review): redefined in a later cell with smaller filter counts;
    that later definition shadows this one on a top-to-bottom run.
    NOTE(review): pool_size=(1, 1) is an identity op — the Inception-v4
    paper uses a 3x3 average pool here; confirm whether intentional.
    """
    x = inputs

    # Branch 1: average pool -> 1x1 conv.
    x_EL1_1 = AveragePooling2D(pool_size=(1, 1), padding='same')(x)
    x_EL1_2 = Conv2D(filters=96, kernel_size=(1, 1), strides=1, padding='same')(x_EL1_1)

    # Branch 2: plain 1x1 conv.
    x_ML1_1 = Conv2D(filters=96, kernel_size=(1, 1), strides=1, padding='same')(x)

    # Branch 3: 1x1 -> 3x3.
    x_MR1_1 = Conv2D(filters=64, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_MR1_2 = Conv2D(filters=96, kernel_size=(3, 3), strides=1, padding='same')(x_MR1_1)

    # Branch 4: 1x1 -> 3x3 -> 3x3 (5x5 receptive field).
    x_ER1_1 = Conv2D(filters=64, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_ER1_2 = Conv2D(filters=96, kernel_size=(3, 3), strides=1, padding='same')(x_ER1_1)
    x_ER1_3 = Conv2D(filters=96, kernel_size=(3, 3), strides=1, padding='same')(x_ER1_2)

    x = concatenate([x_EL1_2, x_ML1_1, x_MR1_2, x_ER1_3])
    return x

def inception_b_block(inputs,
         names=None):
    """Inception-B block: four parallel branches concatenated
    channel-wise; spatial size is unchanged.

    NOTE(review): redefined in a later cell with smaller filter counts;
    that later definition shadows this one on a top-to-bottom run.
    NOTE(review): the middle-right branch applies (1, 7) twice — the
    Inception-v4 paper alternates (1, 7) then (7, 1); confirm whether
    intentional.
    NOTE(review): pool_size=(1, 1) is an identity op (paper uses 3x3).
    """
    x = inputs

    # Branch 1: average pool -> 1x1 conv.
    x_EL1_1 = AveragePooling2D(pool_size=(1, 1), padding='same')(x)
    x_EL1_2 = Conv2D(filters=128, kernel_size=(1, 1), strides=1, padding='same')(x_EL1_1)

    # Branch 2: plain 1x1 conv.
    x_ML1_1 = Conv2D(filters=384, kernel_size=(1, 1), strides=1, padding='same')(x)

    # Branch 3: 1x1 -> factorized 7x7 convs.
    x_MR1_1 = Conv2D(filters=192, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_MR1_2 = Conv2D(filters=224, kernel_size=(1, 7), strides=1, padding='same')(x_MR1_1)
    x_MR1_3 = Conv2D(filters=256, kernel_size=(1, 7), strides=1, padding='same')(x_MR1_2)

    # Branch 4: 1x1 -> two factorized 7x7 convs.
    x_ER1_1 = Conv2D(filters=192, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_ER1_2 = Conv2D(filters=192, kernel_size=(1, 7), strides=1, padding='same')(x_ER1_1)
    x_ER1_3 = Conv2D(filters=224, kernel_size=(7, 1), strides=1, padding='same')(x_ER1_2)
    x_ER1_4 = Conv2D(filters=224, kernel_size=(1, 7), strides=1, padding='same')(x_ER1_3)
    x_ER1_5 = Conv2D(filters=256, kernel_size=(7, 1), strides=1, padding='same')(x_ER1_4)

    x = concatenate([x_EL1_2, x_ML1_1, x_MR1_3, x_ER1_5])
    return x

def inception_c_block(inputs,
         names=None):
    """Inception-C block: branches 3 and 4 each split into parallel
    (1,3)/(3,1) convs, giving six tensors to concatenate; spatial size
    is unchanged.

    NOTE(review): redefined in a later cell with smaller filter counts;
    that later definition shadows this one on a top-to-bottom run.
    NOTE(review): pool_size=(1, 1) is an identity op (paper uses 3x3).
    """
    x = inputs

    # Branch 1: average pool -> 1x1 conv.
    x_EL1_1 = AveragePooling2D(pool_size=(1, 1), padding='same')(x)
    x_EL1_2 = Conv2D(filters=256, kernel_size=(1, 1), strides=1, padding='same')(x_EL1_1)

    # Branch 2: plain 1x1 conv.
    x_ML1_1 = Conv2D(filters=256, kernel_size=(1, 1), strides=1, padding='same')(x)

    # Branch 3: 1x1 stem splitting into parallel (1,3) and (3,1).
    x_MR1_1 = Conv2D(filters=384, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_MR1_1_L2 = Conv2D(filters=256, kernel_size=(1, 3), strides=1, padding='same')(x_MR1_1)
    x_MR1_1_R2 = Conv2D(filters=256, kernel_size=(3, 1), strides=1, padding='same')(x_MR1_1)

    # Branch 4: 1x1 -> (1,3) -> (3,1), then splitting into (3,1) and (1,3).
    x_ER1_1 = Conv2D(filters=384, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_ER1_2 = Conv2D(filters=448, kernel_size=(1, 3), strides=1, padding='same')(x_ER1_1)
    x_ER1_3 = Conv2D(filters=512, kernel_size=(3, 1), strides=1, padding='same')(x_ER1_2)
    x_ER1_3_L1 = Conv2D(filters=256, kernel_size=(3, 1), strides=1, padding='same')(x_ER1_3)
    x_ER1_3_R1 = Conv2D(filters=256, kernel_size=(1, 3), strides=1, padding='same')(x_ER1_3)

    x = concatenate([x_EL1_2, x_ML1_1, x_MR1_1_L2, x_MR1_1_R2, x_ER1_3_L1, x_ER1_3_R1])
    return x

def reduction_a_block(inputs,
                      network_selected="Inception-v4",
                      names=None):
    """Reduction-A block: halves the spatial grid between the A and B
    stages via three stride-2 branches.

    The (k, l, m, n) filter counts depend on which network variant is
    being built, following the Inception-v4 paper's table.

    NOTE(review): redefined in a later cell with smaller filter counts;
    that later definition shadows this one on a top-to-bottom run.

    Raises:
        ValueError: if network_selected is not one of the three known
        variants (previously an unknown name fell through and raised a
        confusing NameError on `k` below).
    """
    if network_selected == "Inception-v4":
        k, l, m, n = 192, 224, 256, 384
    elif network_selected == "Inception-ResNet-v1":
        k, l, m, n = 192, 192, 256, 384
    elif network_selected == "Inception-ResNet-v2":
        k, l, m, n = 256, 256, 384, 384
    else:
        raise ValueError("Unknown network_selected: %r" % (network_selected,))

    x = inputs

    # Left branch: 3x3 max-pool, stride 2.
    x_L_1 = MaxPooling2D(pool_size=(3, 3), strides=2, padding='valid')(x)

    # Middle branch: strided 3x3 conv.
    x_M_1 = Conv2D(filters=n, kernel_size=(3, 3), strides=2, padding='valid')(x)

    # Right branch: 1x1 -> 3x3 -> strided 3x3.
    x_R_1 = Conv2D(filters=k, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_R_2 = Conv2D(filters=l, kernel_size=(3, 3), strides=1, padding='same')(x_R_1)
    x_R_3 = Conv2D(filters=m, kernel_size=(3, 3), strides=2, padding='valid')(x_R_2)

    x = concatenate([x_L_1, x_M_1, x_R_3])
    return x

def reduction_b_block(inputs,
                      names=None):
    """Reduction-B block: halves the spatial grid between the B and C
    stages via three stride-2 branches (max-pool, 1x1->3x3, and
    1x1->(1,7)->(7,1)->3x3).

    NOTE(review): redefined in a later cell with smaller filter counts;
    that later definition shadows this one on a top-to-bottom run.
    """
    x = inputs

    # Left branch: 3x3 max-pool, stride 2.
    x_L_1 = MaxPooling2D(pool_size=(3, 3), strides=2, padding='valid')(x)

    # Middle branch: 1x1 -> strided 3x3.
    x_M_1 = Conv2D(filters=192, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_M_2 = Conv2D(filters=192, kernel_size=(3, 3), strides=2, padding='valid')(x_M_1)

    # Right branch: 1x1 -> factorized 7x7 -> strided 3x3.
    x_R_1 = Conv2D(filters=256, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_R_2 = Conv2D(filters=256, kernel_size=(1, 7), strides=1, padding='same')(x_R_1)
    x_R_3 = Conv2D(filters=320, kernel_size=(7, 1), strides=1, padding='same')(x_R_2)
    x_R_4 = Conv2D(filters=320, kernel_size=(3, 3), strides=2, padding='valid')(x_R_3)

    x = concatenate([x_L_1, x_M_2, x_R_4])
    return x

def create_inception_v4(inputShape=(imgrows, imgclms, channel),
                   num_classes=2):
    """Build and compile the full-width Inception-v4 classifier
    (4 A blocks, 7 B blocks, 3 C blocks, per the paper).

    NOTE(review): redefined in a later cell (slimmed, with added
    BatchNormalization); that later definition shadows this one on a
    top-to-bottom run, so this version is never trained.
    NOTE(review): the default inputShape is captured at definition time
    from the notebook globals imgrows/imgclms/channel.
    """
    NETWORK_SELECTED = "Inception-v4"
    inputs = Input(shape=inputShape)
    # Stem + 4 Inception-A blocks, then Reduction-A.
    x = stem_block(inputs)
    x = inception_a_block(x)
    x = inception_a_block(x)
    x = inception_a_block(x)
    x = inception_a_block(x)
    x = reduction_a_block(x, network_selected=NETWORK_SELECTED)

    # 7 Inception-B blocks, then Reduction-B.
    x = inception_b_block(x)
    x = inception_b_block(x)
    x = inception_b_block(x)
    x = inception_b_block(x)
    x = inception_b_block(x)
    x = inception_b_block(x)
    x = inception_b_block(x)
    x = reduction_b_block(x)

    # 3 Inception-C blocks.
    x = inception_c_block(x)
    x = inception_c_block(x)
    x = inception_c_block(x)

    # Classifier head.
    x = AveragePooling2D(pool_size=(1,1))(x) # Added (1,1) due to negative dimension
    x = Flatten()(x)
    # NOTE(review): no activation argument -> this Dense layer is linear.
    x = Dense(1536)(x) # Changed
    x = Dropout(0.2)(x)
    outputs = Dense(num_classes, activation='softmax', 
                    kernel_initializer='he_normal')(x)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy', 
                  optimizer="Adam", 
                  metrics=['accuracy'])
    return model
In [31]:
# Modified # Inception-v4 -> func: create_inception_v4()

def stem_block(inputs,
         names=None):
    """Slimmed Inception-v4 stem (half the paper's filter counts):
    reduces the input image to the feature grid fed to the Inception-A
    blocks via three conv/pool split-and-merge stages.

    This redefinition shadows the full-width version in the earlier cell
    and is the one actually used by the model built below.
    NOTE(review): `names` is accepted but unused.
    """
    x = inputs  # NOTE(review): redundant — overwritten immediately below

    # Stage 1: three plain convs, then parallel max-pool / strided conv.
    x = Conv2D(filters=16, kernel_size=(3, 3), strides=2, padding='valid')(inputs)
    x = Conv2D(filters=16, kernel_size=(3, 3), strides=1, padding='valid')(x)
    x = Conv2D(filters=32, kernel_size=(3, 3), strides=1, padding='same')(x)
    x_L1_1 = MaxPooling2D(pool_size=(3, 3), strides=2, padding='valid')(x)
    x_R1_1 = Conv2D(filters=48, kernel_size=(3, 3), strides=2, padding='valid')(x)
    x = concatenate([x_L1_1, x_R1_1])

    # Stage 2: parallel 1x1->3x3 and 1x1->7x1->1x7->3x3 branches.
    x_L2_1 = Conv2D(filters=32, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_L2_2 = Conv2D(filters=48, kernel_size=(3, 3), strides=1, padding='valid')(x_L2_1)
    x_R2_1 = Conv2D(filters=32, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_R2_2 = Conv2D(filters=32, kernel_size=(7, 1), strides=1, padding='same')(x_R2_1)
    x_R2_3 = Conv2D(filters=32, kernel_size=(1, 7), strides=1, padding='same')(x_R2_2)
    x_R2_4 = Conv2D(filters=48, kernel_size=(3, 3), strides=1, padding='valid')(x_R2_3)
    x = concatenate([x_L2_2, x_R2_4])

    # Stage 3: strided conv (zero-padded to match the pool branch) and
    # max-pool, merged.
    x_L3_1 = Conv2D(filters=96, kernel_size=(3, 3), strides=2, padding='valid')(x)
    x_L3_2 = ZeroPadding2D(padding=((0,1), (0,1)))(x_L3_1) # Added due to size mismatch
    x_R3_1 = MaxPooling2D(strides=2, padding='valid')(x)
    x = concatenate([x_L3_2, x_R3_1])
    return x

def inception_a_block(inputs,
                      names=None):
    """Slimmed Inception-A block: four parallel branches concatenated
    along the channel axis; spatial size is unchanged.  This
    redefinition shadows the full-width version in the earlier cell.

    NOTE(review): pool_size=(1, 1) is an identity op — the Inception-v4
    paper uses a 3x3 average pool here; confirm whether intentional.
    """
    # Branch 1: (identity) average pool -> 1x1 conv.
    pool_br = AveragePooling2D(pool_size=(1, 1), padding='same')(inputs)
    pool_br = Conv2D(filters=48, kernel_size=(1, 1), strides=1, padding='same')(pool_br)

    # Branch 2: plain 1x1 conv.
    conv1_br = Conv2D(filters=48, kernel_size=(1, 1), strides=1, padding='same')(inputs)

    # Branch 3: 1x1 -> 3x3.
    conv3_br = Conv2D(filters=32, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    conv3_br = Conv2D(filters=48, kernel_size=(3, 3), strides=1, padding='same')(conv3_br)

    # Branch 4: 1x1 -> 3x3 -> 3x3 (5x5 receptive field).
    conv5_br = Conv2D(filters=32, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    conv5_br = Conv2D(filters=48, kernel_size=(3, 3), strides=1, padding='same')(conv5_br)
    conv5_br = Conv2D(filters=48, kernel_size=(3, 3), strides=1, padding='same')(conv5_br)

    return concatenate([pool_br, conv1_br, conv3_br, conv5_br])

def inception_b_block(inputs,
                      names=None):
    """Slimmed Inception-B block: four parallel branches concatenated
    channel-wise; spatial size is unchanged.  This redefinition shadows
    the full-width version in the earlier cell.

    NOTE(review): branch 3 applies (1, 7) twice — the Inception-v4
    paper alternates (1, 7) then (7, 1); confirm whether intentional.
    NOTE(review): pool_size=(1, 1) is an identity op (paper uses 3x3).
    """
    # Branch 1: (identity) average pool -> 1x1 conv.
    pool_br = AveragePooling2D(pool_size=(1, 1), padding='same')(inputs)
    pool_br = Conv2D(filters=64, kernel_size=(1, 1), strides=1, padding='same')(pool_br)

    # Branch 2: plain 1x1 conv.
    conv1_br = Conv2D(filters=192, kernel_size=(1, 1), strides=1, padding='same')(inputs)

    # Branch 3: 1x1 -> factorized 7x7 convs.
    conv7_br = Conv2D(filters=96, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    conv7_br = Conv2D(filters=112, kernel_size=(1, 7), strides=1, padding='same')(conv7_br)
    conv7_br = Conv2D(filters=128, kernel_size=(1, 7), strides=1, padding='same')(conv7_br)

    # Branch 4: 1x1 -> two factorized 7x7 convs.
    deep_br = Conv2D(filters=96, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    deep_br = Conv2D(filters=96, kernel_size=(1, 7), strides=1, padding='same')(deep_br)
    deep_br = Conv2D(filters=112, kernel_size=(7, 1), strides=1, padding='same')(deep_br)
    deep_br = Conv2D(filters=112, kernel_size=(1, 7), strides=1, padding='same')(deep_br)
    deep_br = Conv2D(filters=128, kernel_size=(7, 1), strides=1, padding='same')(deep_br)

    return concatenate([pool_br, conv1_br, conv7_br, deep_br])

def inception_c_block(inputs,
                      names=None):
    """Slimmed Inception-C block: branches 3 and 4 each split into
    parallel (1,3)/(3,1) convs, giving six tensors to concatenate;
    spatial size is unchanged.  This redefinition shadows the
    full-width version in the earlier cell.

    NOTE(review): pool_size=(1, 1) is an identity op (paper uses 3x3).
    """
    # Branch 1: (identity) average pool -> 1x1 conv.
    pool_br = AveragePooling2D(pool_size=(1, 1), padding='same')(inputs)
    pool_br = Conv2D(filters=128, kernel_size=(1, 1), strides=1, padding='same')(pool_br)

    # Branch 2: plain 1x1 conv.
    conv1_br = Conv2D(filters=128, kernel_size=(1, 1), strides=1, padding='same')(inputs)

    # Branch 3: 1x1 stem splitting into parallel (1,3) and (3,1).
    split_stem = Conv2D(filters=192, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    split_h = Conv2D(filters=128, kernel_size=(1, 3), strides=1, padding='same')(split_stem)
    split_v = Conv2D(filters=128, kernel_size=(3, 1), strides=1, padding='same')(split_stem)

    # Branch 4: 1x1 -> (1,3) -> (3,1), then splitting into (3,1) and (1,3).
    deep = Conv2D(filters=192, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    deep = Conv2D(filters=224, kernel_size=(1, 3), strides=1, padding='same')(deep)
    deep = Conv2D(filters=256, kernel_size=(3, 1), strides=1, padding='same')(deep)
    deep_v = Conv2D(filters=128, kernel_size=(3, 1), strides=1, padding='same')(deep)
    deep_h = Conv2D(filters=128, kernel_size=(1, 3), strides=1, padding='same')(deep)

    return concatenate([pool_br, conv1_br, split_h, split_v, deep_v, deep_h])

def reduction_a_block(inputs,
                      network_selected="Inception-v4",
                      names=None):
    """Slimmed Reduction-A block: halves the spatial grid between the A
    and B stages via three stride-2 branches.  This redefinition
    shadows the full-width version in the earlier cell.

    The (k, l, m, n) filter counts depend on which network variant is
    being built (roughly half the paper's values).

    Raises:
        ValueError: if network_selected is not one of the three known
        variants (previously an unknown name fell through and raised a
        confusing NameError on `k` below).
    """
    if network_selected == "Inception-v4":
        k, l, m, n = 96, 112, 128, 192
    elif network_selected == "Inception-ResNet-v1":
        k, l, m, n = 96, 96, 128, 192
    elif network_selected == "Inception-ResNet-v2":
        k, l, m, n = 128, 128, 192, 192
    else:
        raise ValueError("Unknown network_selected: %r" % (network_selected,))

    x = inputs

    # Left branch: 3x3 max-pool, stride 2.
    x_L_1 = MaxPooling2D(pool_size=(3, 3), strides=2, padding='valid')(x)

    # Middle branch: strided 3x3 conv.
    x_M_1 = Conv2D(filters=n, kernel_size=(3, 3), strides=2, padding='valid')(x)

    # Right branch: 1x1 -> 3x3 -> strided 3x3.
    x_R_1 = Conv2D(filters=k, kernel_size=(1, 1), strides=1, padding='same')(x)
    x_R_2 = Conv2D(filters=l, kernel_size=(3, 3), strides=1, padding='same')(x_R_1)
    x_R_3 = Conv2D(filters=m, kernel_size=(3, 3), strides=2, padding='valid')(x_R_2)

    x = concatenate([x_L_1, x_M_1, x_R_3])
    return x

def reduction_b_block(inputs,
                      names=None):
    """Slimmed Reduction-B block: halves the spatial grid between the B
    and C stages via three stride-2 branches.  This redefinition
    shadows the full-width version in the earlier cell.
    """
    # Left branch: 3x3 max-pool, stride 2.
    pool_br = MaxPooling2D(pool_size=(3, 3), strides=2, padding='valid')(inputs)

    # Middle branch: 1x1 -> strided 3x3.
    conv3_br = Conv2D(filters=96, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    conv3_br = Conv2D(filters=96, kernel_size=(3, 3), strides=2, padding='valid')(conv3_br)

    # Right branch: 1x1 -> factorized 7x7 -> strided 3x3.
    conv7_br = Conv2D(filters=128, kernel_size=(1, 1), strides=1, padding='same')(inputs)
    conv7_br = Conv2D(filters=128, kernel_size=(1, 7), strides=1, padding='same')(conv7_br)
    conv7_br = Conv2D(filters=160, kernel_size=(7, 1), strides=1, padding='same')(conv7_br)
    conv7_br = Conv2D(filters=160, kernel_size=(3, 3), strides=2, padding='valid')(conv7_br)

    return concatenate([pool_br, conv3_br, conv7_br])

def create_inception_v4(inputShape=(imgrows, imgclms, channel),
                   num_classes=2):
    """Builds and compiles a scaled-down Inception-v4 classifier.

    Args:
        inputShape: (rows, cols, channels) of the input images; defaults to
            the module-level imgrows/imgclms/channel globals.
        num_classes: number of softmax output classes.

    Returns:
        A compiled Keras Model (categorical crossentropy, module-level
        ``optmz`` optimizer, accuracy metric).
    """
    NETWORK_SELECTED = "Inception-v4"
    inputs = Input(shape=inputShape)

    # Stem, then the Inception-A stage: two pairs of A blocks, each pair
    # followed by batch norm, closed by the Reduction-A block.
    x = BatchNormalization()(stem_block(inputs))
    for _ in range(2):
        x = inception_a_block(x)
        x = inception_a_block(x)
        x = BatchNormalization()(x)
    x = reduction_a_block(x, network_selected=NETWORK_SELECTED)

    # Inception-B stage: three batch-normalized pairs of B blocks plus one
    # extra B block, closed by the Reduction-B block.
    x = BatchNormalization()(x)
    for _ in range(3):
        x = inception_b_block(x)
        x = inception_b_block(x)
        x = BatchNormalization()(x)
    x = inception_b_block(x)
    x = BatchNormalization()(x)
    x = reduction_b_block(x)

    # Inception-C stage: a pair of C blocks, then one more, batch-normalized.
    x = BatchNormalization()(x)
    x = inception_c_block(x)
    x = inception_c_block(x)
    x = BatchNormalization()(x)
    x = inception_c_block(x)
    x = BatchNormalization()(x)

    # Classification head. (1, 1) pooling keeps the feature map unchanged;
    # it was added to avoid a negative-dimension error at this input size.
    x = AveragePooling2D(pool_size=(1,1))(x)
    x = Flatten()(x)
    x = Dense(256)(x)
    x = Dropout(0.2)(x)
    outputs = Dense(num_classes, activation='softmax',
                    kernel_initializer='he_normal')(x)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optmz,
                  metrics=['accuracy'])
    return model
In [32]:
# Setup the models: two independent instances of the same architecture —
# presumably trained weights from `model` are later loaded into `modelGo`
# for evaluation (TODO confirm against later cells).
model       = create_inception_v4() # This is meant for training
modelGo     = create_inception_v4() # This is used for final testing

# Display the layer-by-layer architecture and parameter counts.
model.summary()
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_9 (InputLayer)            (None, 96, 96, 3)    0                                            
__________________________________________________________________________________________________
conv2d_1168 (Conv2D)            (None, 47, 47, 16)   448         input_9[0][0]                    
__________________________________________________________________________________________________
conv2d_1169 (Conv2D)            (None, 45, 45, 16)   2320        conv2d_1168[0][0]                
__________________________________________________________________________________________________
conv2d_1170 (Conv2D)            (None, 45, 45, 32)   4640        conv2d_1169[0][0]                
__________________________________________________________________________________________________
max_pooling2d_28 (MaxPooling2D) (None, 22, 22, 32)   0           conv2d_1170[0][0]                
__________________________________________________________________________________________________
conv2d_1171 (Conv2D)            (None, 22, 22, 48)   13872       conv2d_1170[0][0]                
__________________________________________________________________________________________________
concatenate_148 (Concatenate)   (None, 22, 22, 80)   0           max_pooling2d_28[0][0]           
                                                                 conv2d_1171[0][0]                
__________________________________________________________________________________________________
conv2d_1174 (Conv2D)            (None, 22, 22, 32)   2592        concatenate_148[0][0]            
__________________________________________________________________________________________________
conv2d_1175 (Conv2D)            (None, 22, 22, 32)   7200        conv2d_1174[0][0]                
__________________________________________________________________________________________________
conv2d_1172 (Conv2D)            (None, 22, 22, 32)   2592        concatenate_148[0][0]            
__________________________________________________________________________________________________
conv2d_1176 (Conv2D)            (None, 22, 22, 32)   7200        conv2d_1175[0][0]                
__________________________________________________________________________________________________
conv2d_1173 (Conv2D)            (None, 20, 20, 48)   13872       conv2d_1172[0][0]                
__________________________________________________________________________________________________
conv2d_1177 (Conv2D)            (None, 20, 20, 48)   13872       conv2d_1176[0][0]                
__________________________________________________________________________________________________
concatenate_149 (Concatenate)   (None, 20, 20, 96)   0           conv2d_1173[0][0]                
                                                                 conv2d_1177[0][0]                
__________________________________________________________________________________________________
conv2d_1178 (Conv2D)            (None, 9, 9, 96)     83040       concatenate_149[0][0]            
__________________________________________________________________________________________________
zero_padding2d_8 (ZeroPadding2D (None, 10, 10, 96)   0           conv2d_1178[0][0]                
__________________________________________________________________________________________________
max_pooling2d_29 (MaxPooling2D) (None, 10, 10, 96)   0           concatenate_149[0][0]            
__________________________________________________________________________________________________
concatenate_150 (Concatenate)   (None, 10, 10, 192)  0           zero_padding2d_8[0][0]           
                                                                 max_pooling2d_29[0][0]           
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 10, 10, 192)  768         concatenate_150[0][0]            
__________________________________________________________________________________________________
conv2d_1183 (Conv2D)            (None, 10, 10, 32)   6176        batch_normalization_12[0][0]     
__________________________________________________________________________________________________
average_pooling2d_120 (AverageP (None, 10, 10, 192)  0           batch_normalization_12[0][0]     
__________________________________________________________________________________________________
conv2d_1181 (Conv2D)            (None, 10, 10, 32)   6176        batch_normalization_12[0][0]     
__________________________________________________________________________________________________
conv2d_1184 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1183[0][0]                
__________________________________________________________________________________________________
conv2d_1179 (Conv2D)            (None, 10, 10, 48)   9264        average_pooling2d_120[0][0]      
__________________________________________________________________________________________________
conv2d_1180 (Conv2D)            (None, 10, 10, 48)   9264        batch_normalization_12[0][0]     
__________________________________________________________________________________________________
conv2d_1182 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1181[0][0]                
__________________________________________________________________________________________________
conv2d_1185 (Conv2D)            (None, 10, 10, 48)   20784       conv2d_1184[0][0]                
__________________________________________________________________________________________________
concatenate_151 (Concatenate)   (None, 10, 10, 192)  0           conv2d_1179[0][0]                
                                                                 conv2d_1180[0][0]                
                                                                 conv2d_1182[0][0]                
                                                                 conv2d_1185[0][0]                
__________________________________________________________________________________________________
conv2d_1190 (Conv2D)            (None, 10, 10, 32)   6176        concatenate_151[0][0]            
__________________________________________________________________________________________________
average_pooling2d_121 (AverageP (None, 10, 10, 192)  0           concatenate_151[0][0]            
__________________________________________________________________________________________________
conv2d_1188 (Conv2D)            (None, 10, 10, 32)   6176        concatenate_151[0][0]            
__________________________________________________________________________________________________
conv2d_1191 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1190[0][0]                
__________________________________________________________________________________________________
conv2d_1186 (Conv2D)            (None, 10, 10, 48)   9264        average_pooling2d_121[0][0]      
__________________________________________________________________________________________________
conv2d_1187 (Conv2D)            (None, 10, 10, 48)   9264        concatenate_151[0][0]            
__________________________________________________________________________________________________
conv2d_1189 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1188[0][0]                
__________________________________________________________________________________________________
conv2d_1192 (Conv2D)            (None, 10, 10, 48)   20784       conv2d_1191[0][0]                
__________________________________________________________________________________________________
concatenate_152 (Concatenate)   (None, 10, 10, 192)  0           conv2d_1186[0][0]                
                                                                 conv2d_1187[0][0]                
                                                                 conv2d_1189[0][0]                
                                                                 conv2d_1192[0][0]                
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 10, 10, 192)  768         concatenate_152[0][0]            
__________________________________________________________________________________________________
conv2d_1197 (Conv2D)            (None, 10, 10, 32)   6176        batch_normalization_13[0][0]     
__________________________________________________________________________________________________
average_pooling2d_122 (AverageP (None, 10, 10, 192)  0           batch_normalization_13[0][0]     
__________________________________________________________________________________________________
conv2d_1195 (Conv2D)            (None, 10, 10, 32)   6176        batch_normalization_13[0][0]     
__________________________________________________________________________________________________
conv2d_1198 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1197[0][0]                
__________________________________________________________________________________________________
conv2d_1193 (Conv2D)            (None, 10, 10, 48)   9264        average_pooling2d_122[0][0]      
__________________________________________________________________________________________________
conv2d_1194 (Conv2D)            (None, 10, 10, 48)   9264        batch_normalization_13[0][0]     
__________________________________________________________________________________________________
conv2d_1196 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1195[0][0]                
__________________________________________________________________________________________________
conv2d_1199 (Conv2D)            (None, 10, 10, 48)   20784       conv2d_1198[0][0]                
__________________________________________________________________________________________________
concatenate_153 (Concatenate)   (None, 10, 10, 192)  0           conv2d_1193[0][0]                
                                                                 conv2d_1194[0][0]                
                                                                 conv2d_1196[0][0]                
                                                                 conv2d_1199[0][0]                
__________________________________________________________________________________________________
conv2d_1204 (Conv2D)            (None, 10, 10, 32)   6176        concatenate_153[0][0]            
__________________________________________________________________________________________________
average_pooling2d_123 (AverageP (None, 10, 10, 192)  0           concatenate_153[0][0]            
__________________________________________________________________________________________________
conv2d_1202 (Conv2D)            (None, 10, 10, 32)   6176        concatenate_153[0][0]            
__________________________________________________________________________________________________
conv2d_1205 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1204[0][0]                
__________________________________________________________________________________________________
conv2d_1200 (Conv2D)            (None, 10, 10, 48)   9264        average_pooling2d_123[0][0]      
__________________________________________________________________________________________________
conv2d_1201 (Conv2D)            (None, 10, 10, 48)   9264        concatenate_153[0][0]            
__________________________________________________________________________________________________
conv2d_1203 (Conv2D)            (None, 10, 10, 48)   13872       conv2d_1202[0][0]                
__________________________________________________________________________________________________
conv2d_1206 (Conv2D)            (None, 10, 10, 48)   20784       conv2d_1205[0][0]                
__________________________________________________________________________________________________
concatenate_154 (Concatenate)   (None, 10, 10, 192)  0           conv2d_1200[0][0]                
                                                                 conv2d_1201[0][0]                
                                                                 conv2d_1203[0][0]                
                                                                 conv2d_1206[0][0]                
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 10, 10, 192)  768         concatenate_154[0][0]            
__________________________________________________________________________________________________
conv2d_1208 (Conv2D)            (None, 10, 10, 96)   18528       batch_normalization_14[0][0]     
__________________________________________________________________________________________________
conv2d_1209 (Conv2D)            (None, 10, 10, 112)  96880       conv2d_1208[0][0]                
__________________________________________________________________________________________________
max_pooling2d_30 (MaxPooling2D) (None, 4, 4, 192)    0           batch_normalization_14[0][0]     
__________________________________________________________________________________________________
conv2d_1207 (Conv2D)            (None, 4, 4, 192)    331968      batch_normalization_14[0][0]     
__________________________________________________________________________________________________
conv2d_1210 (Conv2D)            (None, 4, 4, 128)    129152      conv2d_1209[0][0]                
__________________________________________________________________________________________________
concatenate_155 (Concatenate)   (None, 4, 4, 512)    0           max_pooling2d_30[0][0]           
                                                                 conv2d_1207[0][0]                
                                                                 conv2d_1210[0][0]                
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 4, 4, 512)    2048        concatenate_155[0][0]            
__________________________________________________________________________________________________
conv2d_1216 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_1217 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1216[0][0]                
__________________________________________________________________________________________________
conv2d_1213 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_1218 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1217[0][0]                
__________________________________________________________________________________________________
average_pooling2d_124 (AverageP (None, 4, 4, 512)    0           batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_1214 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1213[0][0]                
__________________________________________________________________________________________________
conv2d_1219 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1218[0][0]                
__________________________________________________________________________________________________
conv2d_1211 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_124[0][0]      
__________________________________________________________________________________________________
conv2d_1212 (Conv2D)            (None, 4, 4, 192)    98496       batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_1215 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1214[0][0]                
__________________________________________________________________________________________________
conv2d_1220 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1219[0][0]                
__________________________________________________________________________________________________
concatenate_156 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1211[0][0]                
                                                                 conv2d_1212[0][0]                
                                                                 conv2d_1215[0][0]                
                                                                 conv2d_1220[0][0]                
__________________________________________________________________________________________________
conv2d_1226 (Conv2D)            (None, 4, 4, 96)     49248       concatenate_156[0][0]            
__________________________________________________________________________________________________
conv2d_1227 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1226[0][0]                
__________________________________________________________________________________________________
conv2d_1223 (Conv2D)            (None, 4, 4, 96)     49248       concatenate_156[0][0]            
__________________________________________________________________________________________________
conv2d_1228 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1227[0][0]                
__________________________________________________________________________________________________
average_pooling2d_125 (AverageP (None, 4, 4, 512)    0           concatenate_156[0][0]            
__________________________________________________________________________________________________
conv2d_1224 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1223[0][0]                
__________________________________________________________________________________________________
conv2d_1229 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1228[0][0]                
__________________________________________________________________________________________________
conv2d_1221 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_125[0][0]      
__________________________________________________________________________________________________
conv2d_1222 (Conv2D)            (None, 4, 4, 192)    98496       concatenate_156[0][0]            
__________________________________________________________________________________________________
conv2d_1225 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1224[0][0]                
__________________________________________________________________________________________________
conv2d_1230 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1229[0][0]                
__________________________________________________________________________________________________
concatenate_157 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1221[0][0]                
                                                                 conv2d_1222[0][0]                
                                                                 conv2d_1225[0][0]                
                                                                 conv2d_1230[0][0]                
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 4, 4, 512)    2048        concatenate_157[0][0]            
__________________________________________________________________________________________________
conv2d_1236 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_16[0][0]     
__________________________________________________________________________________________________
conv2d_1237 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1236[0][0]                
__________________________________________________________________________________________________
conv2d_1233 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_16[0][0]     
__________________________________________________________________________________________________
conv2d_1238 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1237[0][0]                
__________________________________________________________________________________________________
average_pooling2d_126 (AverageP (None, 4, 4, 512)    0           batch_normalization_16[0][0]     
__________________________________________________________________________________________________
conv2d_1234 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1233[0][0]                
__________________________________________________________________________________________________
conv2d_1239 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1238[0][0]                
__________________________________________________________________________________________________
conv2d_1231 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_126[0][0]      
__________________________________________________________________________________________________
conv2d_1232 (Conv2D)            (None, 4, 4, 192)    98496       batch_normalization_16[0][0]     
__________________________________________________________________________________________________
conv2d_1235 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1234[0][0]                
__________________________________________________________________________________________________
conv2d_1240 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1239[0][0]                
__________________________________________________________________________________________________
concatenate_158 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1231[0][0]                
                                                                 conv2d_1232[0][0]                
                                                                 conv2d_1235[0][0]                
                                                                 conv2d_1240[0][0]                
__________________________________________________________________________________________________
conv2d_1246 (Conv2D)            (None, 4, 4, 96)     49248       concatenate_158[0][0]            
__________________________________________________________________________________________________
conv2d_1247 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1246[0][0]                
__________________________________________________________________________________________________
conv2d_1243 (Conv2D)            (None, 4, 4, 96)     49248       concatenate_158[0][0]            
__________________________________________________________________________________________________
conv2d_1248 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1247[0][0]                
__________________________________________________________________________________________________
average_pooling2d_127 (AverageP (None, 4, 4, 512)    0           concatenate_158[0][0]            
__________________________________________________________________________________________________
conv2d_1244 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1243[0][0]                
__________________________________________________________________________________________________
conv2d_1249 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1248[0][0]                
__________________________________________________________________________________________________
conv2d_1241 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_127[0][0]      
__________________________________________________________________________________________________
conv2d_1242 (Conv2D)            (None, 4, 4, 192)    98496       concatenate_158[0][0]            
__________________________________________________________________________________________________
conv2d_1245 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1244[0][0]                
__________________________________________________________________________________________________
conv2d_1250 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1249[0][0]                
__________________________________________________________________________________________________
concatenate_159 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1241[0][0]                
                                                                 conv2d_1242[0][0]                
                                                                 conv2d_1245[0][0]                
                                                                 conv2d_1250[0][0]                
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 4, 4, 512)    2048        concatenate_159[0][0]            
__________________________________________________________________________________________________
conv2d_1256 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_17[0][0]     
__________________________________________________________________________________________________
conv2d_1257 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1256[0][0]                
__________________________________________________________________________________________________
conv2d_1253 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_17[0][0]     
__________________________________________________________________________________________________
conv2d_1258 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1257[0][0]                
__________________________________________________________________________________________________
average_pooling2d_128 (AverageP (None, 4, 4, 512)    0           batch_normalization_17[0][0]     
__________________________________________________________________________________________________
conv2d_1254 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1253[0][0]                
__________________________________________________________________________________________________
conv2d_1259 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1258[0][0]                
__________________________________________________________________________________________________
conv2d_1251 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_128[0][0]      
__________________________________________________________________________________________________
conv2d_1252 (Conv2D)            (None, 4, 4, 192)    98496       batch_normalization_17[0][0]     
__________________________________________________________________________________________________
conv2d_1255 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1254[0][0]                
__________________________________________________________________________________________________
conv2d_1260 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1259[0][0]                
__________________________________________________________________________________________________
concatenate_160 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1251[0][0]                
                                                                 conv2d_1252[0][0]                
                                                                 conv2d_1255[0][0]                
                                                                 conv2d_1260[0][0]                
__________________________________________________________________________________________________
conv2d_1266 (Conv2D)            (None, 4, 4, 96)     49248       concatenate_160[0][0]            
__________________________________________________________________________________________________
conv2d_1267 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1266[0][0]                
__________________________________________________________________________________________________
conv2d_1263 (Conv2D)            (None, 4, 4, 96)     49248       concatenate_160[0][0]            
__________________________________________________________________________________________________
conv2d_1268 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1267[0][0]                
__________________________________________________________________________________________________
average_pooling2d_129 (AverageP (None, 4, 4, 512)    0           concatenate_160[0][0]            
__________________________________________________________________________________________________
conv2d_1264 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1263[0][0]                
__________________________________________________________________________________________________
conv2d_1269 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1268[0][0]                
__________________________________________________________________________________________________
conv2d_1261 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_129[0][0]      
__________________________________________________________________________________________________
conv2d_1262 (Conv2D)            (None, 4, 4, 192)    98496       concatenate_160[0][0]            
__________________________________________________________________________________________________
conv2d_1265 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1264[0][0]                
__________________________________________________________________________________________________
conv2d_1270 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1269[0][0]                
__________________________________________________________________________________________________
concatenate_161 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1261[0][0]                
                                                                 conv2d_1262[0][0]                
                                                                 conv2d_1265[0][0]                
                                                                 conv2d_1270[0][0]                
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 4, 4, 512)    2048        concatenate_161[0][0]            
__________________________________________________________________________________________________
conv2d_1276 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_18[0][0]     
__________________________________________________________________________________________________
conv2d_1277 (Conv2D)            (None, 4, 4, 96)     64608       conv2d_1276[0][0]                
__________________________________________________________________________________________________
conv2d_1273 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_18[0][0]     
__________________________________________________________________________________________________
conv2d_1278 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1277[0][0]                
__________________________________________________________________________________________________
average_pooling2d_130 (AverageP (None, 4, 4, 512)    0           batch_normalization_18[0][0]     
__________________________________________________________________________________________________
conv2d_1274 (Conv2D)            (None, 4, 4, 112)    75376       conv2d_1273[0][0]                
__________________________________________________________________________________________________
conv2d_1279 (Conv2D)            (None, 4, 4, 112)    87920       conv2d_1278[0][0]                
__________________________________________________________________________________________________
conv2d_1271 (Conv2D)            (None, 4, 4, 64)     32832       average_pooling2d_130[0][0]      
__________________________________________________________________________________________________
conv2d_1272 (Conv2D)            (None, 4, 4, 192)    98496       batch_normalization_18[0][0]     
__________________________________________________________________________________________________
conv2d_1275 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1274[0][0]                
__________________________________________________________________________________________________
conv2d_1280 (Conv2D)            (None, 4, 4, 128)    100480      conv2d_1279[0][0]                
__________________________________________________________________________________________________
concatenate_162 (Concatenate)   (None, 4, 4, 512)    0           conv2d_1271[0][0]                
                                                                 conv2d_1272[0][0]                
                                                                 conv2d_1275[0][0]                
                                                                 conv2d_1280[0][0]                
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 4, 4, 512)    2048        concatenate_162[0][0]            
__________________________________________________________________________________________________
conv2d_1283 (Conv2D)            (None, 4, 4, 128)    65664       batch_normalization_19[0][0]     
__________________________________________________________________________________________________
conv2d_1284 (Conv2D)            (None, 4, 4, 128)    114816      conv2d_1283[0][0]                
__________________________________________________________________________________________________
conv2d_1281 (Conv2D)            (None, 4, 4, 96)     49248       batch_normalization_19[0][0]     
__________________________________________________________________________________________________
conv2d_1285 (Conv2D)            (None, 4, 4, 160)    143520      conv2d_1284[0][0]                
__________________________________________________________________________________________________
max_pooling2d_31 (MaxPooling2D) (None, 1, 1, 512)    0           batch_normalization_19[0][0]     
__________________________________________________________________________________________________
conv2d_1282 (Conv2D)            (None, 1, 1, 96)     83040       conv2d_1281[0][0]                
__________________________________________________________________________________________________
conv2d_1286 (Conv2D)            (None, 1, 1, 160)    230560      conv2d_1285[0][0]                
__________________________________________________________________________________________________
concatenate_163 (Concatenate)   (None, 1, 1, 768)    0           max_pooling2d_31[0][0]           
                                                                 conv2d_1282[0][0]                
                                                                 conv2d_1286[0][0]                
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 1, 1, 768)    3072        concatenate_163[0][0]            
__________________________________________________________________________________________________
conv2d_1292 (Conv2D)            (None, 1, 1, 192)    147648      batch_normalization_20[0][0]     
__________________________________________________________________________________________________
conv2d_1293 (Conv2D)            (None, 1, 1, 224)    129248      conv2d_1292[0][0]                
__________________________________________________________________________________________________
average_pooling2d_131 (AverageP (None, 1, 1, 768)    0           batch_normalization_20[0][0]     
__________________________________________________________________________________________________
conv2d_1289 (Conv2D)            (None, 1, 1, 192)    147648      batch_normalization_20[0][0]     
__________________________________________________________________________________________________
conv2d_1294 (Conv2D)            (None, 1, 1, 256)    172288      conv2d_1293[0][0]                
__________________________________________________________________________________________________
conv2d_1287 (Conv2D)            (None, 1, 1, 128)    98432       average_pooling2d_131[0][0]      
__________________________________________________________________________________________________
conv2d_1288 (Conv2D)            (None, 1, 1, 128)    98432       batch_normalization_20[0][0]     
__________________________________________________________________________________________________
conv2d_1290 (Conv2D)            (None, 1, 1, 128)    73856       conv2d_1289[0][0]                
__________________________________________________________________________________________________
conv2d_1291 (Conv2D)            (None, 1, 1, 128)    73856       conv2d_1289[0][0]                
__________________________________________________________________________________________________
conv2d_1295 (Conv2D)            (None, 1, 1, 128)    98432       conv2d_1294[0][0]                
__________________________________________________________________________________________________
conv2d_1296 (Conv2D)            (None, 1, 1, 128)    98432       conv2d_1294[0][0]                
__________________________________________________________________________________________________
concatenate_164 (Concatenate)   (None, 1, 1, 768)    0           conv2d_1287[0][0]                
                                                                 conv2d_1288[0][0]                
                                                                 conv2d_1290[0][0]                
                                                                 conv2d_1291[0][0]                
                                                                 conv2d_1295[0][0]                
                                                                 conv2d_1296[0][0]                
__________________________________________________________________________________________________
conv2d_1302 (Conv2D)            (None, 1, 1, 192)    147648      concatenate_164[0][0]            
__________________________________________________________________________________________________
conv2d_1303 (Conv2D)            (None, 1, 1, 224)    129248      conv2d_1302[0][0]                
__________________________________________________________________________________________________
average_pooling2d_132 (AverageP (None, 1, 1, 768)    0           concatenate_164[0][0]            
__________________________________________________________________________________________________
conv2d_1299 (Conv2D)            (None, 1, 1, 192)    147648      concatenate_164[0][0]            
__________________________________________________________________________________________________
conv2d_1304 (Conv2D)            (None, 1, 1, 256)    172288      conv2d_1303[0][0]                
__________________________________________________________________________________________________
conv2d_1297 (Conv2D)            (None, 1, 1, 128)    98432       average_pooling2d_132[0][0]      
__________________________________________________________________________________________________
conv2d_1298 (Conv2D)            (None, 1, 1, 128)    98432       concatenate_164[0][0]            
__________________________________________________________________________________________________
conv2d_1300 (Conv2D)            (None, 1, 1, 128)    73856       conv2d_1299[0][0]                
__________________________________________________________________________________________________
conv2d_1301 (Conv2D)            (None, 1, 1, 128)    73856       conv2d_1299[0][0]                
__________________________________________________________________________________________________
conv2d_1305 (Conv2D)            (None, 1, 1, 128)    98432       conv2d_1304[0][0]                
__________________________________________________________________________________________________
conv2d_1306 (Conv2D)            (None, 1, 1, 128)    98432       conv2d_1304[0][0]                
__________________________________________________________________________________________________
concatenate_165 (Concatenate)   (None, 1, 1, 768)    0           conv2d_1297[0][0]                
                                                                 conv2d_1298[0][0]                
                                                                 conv2d_1300[0][0]                
                                                                 conv2d_1301[0][0]                
                                                                 conv2d_1305[0][0]                
                                                                 conv2d_1306[0][0]                
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 1, 1, 768)    3072        concatenate_165[0][0]            
__________________________________________________________________________________________________
conv2d_1312 (Conv2D)            (None, 1, 1, 192)    147648      batch_normalization_21[0][0]     
__________________________________________________________________________________________________
conv2d_1313 (Conv2D)            (None, 1, 1, 224)    129248      conv2d_1312[0][0]                
__________________________________________________________________________________________________
average_pooling2d_133 (AverageP (None, 1, 1, 768)    0           batch_normalization_21[0][0]     
__________________________________________________________________________________________________
conv2d_1309 (Conv2D)            (None, 1, 1, 192)    147648      batch_normalization_21[0][0]     
__________________________________________________________________________________________________
conv2d_1314 (Conv2D)            (None, 1, 1, 256)    172288      conv2d_1313[0][0]                
__________________________________________________________________________________________________
conv2d_1307 (Conv2D)            (None, 1, 1, 128)    98432       average_pooling2d_133[0][0]      
__________________________________________________________________________________________________
conv2d_1308 (Conv2D)            (None, 1, 1, 128)    98432       batch_normalization_21[0][0]     
__________________________________________________________________________________________________
conv2d_1310 (Conv2D)            (None, 1, 1, 128)    73856       conv2d_1309[0][0]                
__________________________________________________________________________________________________
conv2d_1311 (Conv2D)            (None, 1, 1, 128)    73856       conv2d_1309[0][0]                
__________________________________________________________________________________________________
conv2d_1315 (Conv2D)            (None, 1, 1, 128)    98432       conv2d_1314[0][0]                
__________________________________________________________________________________________________
conv2d_1316 (Conv2D)            (None, 1, 1, 128)    98432       conv2d_1314[0][0]                
__________________________________________________________________________________________________
concatenate_166 (Concatenate)   (None, 1, 1, 768)    0           conv2d_1307[0][0]                
                                                                 conv2d_1308[0][0]                
                                                                 conv2d_1310[0][0]                
                                                                 conv2d_1311[0][0]                
                                                                 conv2d_1315[0][0]                
                                                                 conv2d_1316[0][0]                
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 1, 1, 768)    3072        concatenate_166[0][0]            
__________________________________________________________________________________________________
average_pooling2d_134 (AverageP (None, 1, 1, 768)    0           batch_normalization_22[0][0]     
__________________________________________________________________________________________________
flatten_8 (Flatten)             (None, 768)          0           average_pooling2d_134[0][0]      
__________________________________________________________________________________________________
dense_16 (Dense)                (None, 256)          196864      flatten_8[0][0]                  
__________________________________________________________________________________________________
dropout_8 (Dropout)             (None, 256)          0           dense_16[0][0]                   
__________________________________________________________________________________________________
dense_17 (Dense)                (None, 2)            514         dropout_8[0][0]                  
==================================================================================================
Total params: 10,505,058
Trainable params: 10,494,178
Non-trainable params: 10,880
__________________________________________________________________________________________________
In [33]:
# Training callbacks:
#  - ModelCheckpoint keeps only the weights from the epoch with the best
#    validation accuracy ('val_acc' is the metric name in this TF/Keras version).
#  - CSVLogger writes per-epoch metrics to <modelname>.csv for later plotting.
filepath = modelname + ".hdf5"

checkpoint = ModelCheckpoint(filepath,
                             monitor='val_acc',
                             verbose=0,
                             save_best_only=True,  # overwrite only on improvement
                             mode='max')

# Log the epoch detail into csv
csv_logger = CSVLogger(modelname + '.csv')

callbacks_list = [checkpoint, csv_logger]

# def lrSchedule(epoch):
#     lr  = 1e-3
    
#     if epoch > 270: #190
#         lr  *= 0.5e-3
        
#     elif epoch > 240: #160
#         lr  *= 1e-3
        
#     elif epoch > 200: #140
#         lr  *= 1e-2
        
#     elif epoch > 150: #100
#         lr  *= 1e-1
        
#     print('Learning rate: ', lr)
    
#     return lr

# LRScheduler     = LearningRateScheduler(lrSchedule)

#                             # Create checkpoint for the training
#                             # This checkpoint performs model saving when
#                             # an epoch gives highest testing accuracy
# filepath        = modelname + ".hdf5"
# checkpoint      = ModelCheckpoint(filepath, 
#                                   monitor='val_acc', 
#                                   verbose=0, 
#                                   save_best_only=True, 
#                                   mode='max')

#                             # Log the epoch detail into csv
# csv_logger      = CSVLogger(modelname +'.csv')
# callbacks_list  = [checkpoint, csv_logger, LRScheduler]
In [34]:
# Train the model. Progress goes to stdout; the best weights and the
# per-epoch CSV log are produced by the callbacks configured above.
model.fit(x=trDat,
          y=trLbl,
          batch_size=32,
          epochs=60,
          validation_data=(tsDat, tsLbl),
          callbacks=callbacks_list)

# datagen = ImageDataGenerator(width_shift_range=0.25,
#                              height_shift_range=0.25,
#                              rotation_range=45,
#                              zoom_range=0.8,
#                              #zca_epsilon=1e-6,
#                              #zca_whitening=True,
#                              fill_mode='nearest',
#                              horizontal_flip=True,
#                              vertical_flip=False)

# model.fit_generator(datagen.flow(trDat, trLbl, batch_size=16),
#                     validation_data=(tsDat, tsLbl),
#                     epochs=60, #300 
#                     verbose=1,
#                     steps_per_epoch=len(trDat)/16,
#                     callbacks=callbacks_list)
Train on 14784 samples, validate on 3696 samples
Epoch 1/60
14784/14784 [==============================] - 87s 6ms/step - loss: 1.1840 - acc: 0.5967 - val_loss: 1.3401 - val_acc: 0.8041
Epoch 2/60
14784/14784 [==============================] - 67s 5ms/step - loss: 0.4505 - acc: 0.7969 - val_loss: 0.8990 - val_acc: 0.8212
Epoch 3/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.3942 - acc: 0.8375 - val_loss: 0.6357 - val_acc: 0.8436
Epoch 4/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.3560 - acc: 0.8567 - val_loss: 0.3458 - val_acc: 0.8750
Epoch 5/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.3401 - acc: 0.8643 - val_loss: 0.3739 - val_acc: 0.8742
Epoch 6/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.3212 - acc: 0.8716 - val_loss: 0.3047 - val_acc: 0.8777
Epoch 7/60
14784/14784 [==============================] - 67s 5ms/step - loss: 0.3056 - acc: 0.8792 - val_loss: 0.3217 - val_acc: 0.8839
Epoch 8/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.3021 - acc: 0.8804 - val_loss: 0.3132 - val_acc: 0.8975
Epoch 9/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2864 - acc: 0.8890 - val_loss: 0.3080 - val_acc: 0.8761
Epoch 10/60
14784/14784 [==============================] - 67s 5ms/step - loss: 0.2769 - acc: 0.8891 - val_loss: 0.4172 - val_acc: 0.8222
Epoch 11/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2669 - acc: 0.8938 - val_loss: 0.2608 - val_acc: 0.9056
Epoch 12/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2589 - acc: 0.8993 - val_loss: 0.2609 - val_acc: 0.8920
Epoch 13/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2513 - acc: 0.9037 - val_loss: 0.2877 - val_acc: 0.8823
Epoch 14/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2448 - acc: 0.9049 - val_loss: 0.3028 - val_acc: 0.8631
Epoch 15/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2318 - acc: 0.9103 - val_loss: 0.3266 - val_acc: 0.8874
Epoch 16/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2190 - acc: 0.9150 - val_loss: 0.2515 - val_acc: 0.9031
Epoch 17/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2116 - acc: 0.9201 - val_loss: 0.2335 - val_acc: 0.9115
Epoch 18/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.2038 - acc: 0.9242 - val_loss: 0.2824 - val_acc: 0.9056
Epoch 19/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1936 - acc: 0.9274 - val_loss: 0.2522 - val_acc: 0.8991
Epoch 20/60
14784/14784 [==============================] - 69s 5ms/step - loss: 0.1852 - acc: 0.9297 - val_loss: 0.2294 - val_acc: 0.9137
Epoch 21/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1750 - acc: 0.9333 - val_loss: 0.2999 - val_acc: 0.8888
Epoch 22/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1651 - acc: 0.9377 - val_loss: 0.3215 - val_acc: 0.9010
Epoch 23/60
14784/14784 [==============================] - 69s 5ms/step - loss: 0.1596 - acc: 0.9385 - val_loss: 0.2865 - val_acc: 0.8918
Epoch 24/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1453 - acc: 0.9439 - val_loss: 0.7209 - val_acc: 0.8320
Epoch 25/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1369 - acc: 0.9469 - val_loss: 0.3010 - val_acc: 0.9077
Epoch 26/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1310 - acc: 0.9494 - val_loss: 0.2976 - val_acc: 0.9034
Epoch 27/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1240 - acc: 0.9532 - val_loss: 0.3162 - val_acc: 0.8985
Epoch 28/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1097 - acc: 0.9564 - val_loss: 0.5461 - val_acc: 0.8718
Epoch 29/60
14784/14784 [==============================] - 68s 5ms/step - loss: 0.1131 - acc: 0.9582 - val_loss: 0.5051 - val_acc: 0.8999
Epoch 30/60
 8352/14784 [===============>..............] - ETA: 27s - loss: 0.1044 - acc: 0.9605
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-34-14a099798c15> in <module>
      6           epochs=60,
      7           batch_size=32,
----> 8           callbacks=callbacks_list)
      9 
     10 # datagen = ImageDataGenerator(width_shift_range=0.25,

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)
   1361           initial_epoch=initial_epoch,
   1362           steps_per_epoch=steps_per_epoch,
-> 1363           validation_steps=validation_steps)
   1364 
   1365   def evaluate(self,

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\engine\training_arrays.py in fit_loop(model, inputs, targets, sample_weights, batch_size, epochs, verbose, callbacks, val_inputs, val_targets, val_sample_weights, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)
    262           ins_batch[i] = ins_batch[i].toarray()
    263 
--> 264         outs = f(ins_batch)
    265         if not isinstance(outs, list):
    266           outs = [outs]

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\backend.py in __call__(self, inputs)
   2912       self._make_callable(feed_arrays, feed_symbols, symbol_vals, session)
   2913 
-> 2914     fetched = self._callable_fn(*array_vals)
   2915     self._call_fetch_callbacks(fetched[-len(self._fetches):])
   2916     return fetched[:len(self.outputs)]

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\client\session.py in __call__(self, *args, **kwargs)
   1380           ret = tf_session.TF_SessionRunCallable(
   1381               self._session._session, self._handle, args, status,
-> 1382               run_metadata_ptr)
   1383         if run_metadata:
   1384           proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

KeyboardInterrupt: 
In [35]:
# Training is done: restore the best checkpointed weights into a separate
# inference model (modelGo) and compile it so we can run the final
# evaluation on the held-out test set.
modelGo.load_weights(filepath)
modelGo.compile(optimizer='adam',
                loss='categorical_crossentropy',
                metrics=['accuracy'])
In [36]:
# Make classification on the test dataset
predicts    = modelGo.predict(tsDat)        # model outputs, one row per test sample

# Prepare the classification output
# for the classification report:
# collapse per-class scores / one-hot labels to integer class indices
predout     = np.argmax(predicts,axis=1)
testout     = np.argmax(tsLbl,axis=1)
labelname   = ['non-flower', 'flower']
                                            # the labels for the classification report


testScores  = metrics.accuracy_score(testout,predout)
confusion   = metrics.confusion_matrix(testout,predout)   # rows = true class, cols = predicted


print("Best accuracy (on testing dataset): %.2f%%" % (testScores*100))
print(metrics.classification_report(testout,predout,target_names=labelname,digits=4))
print(confusion)
Best accuracy (on testing dataset): 91.37%
              precision    recall  f1-score   support

  non-flower     0.8671    0.9205    0.8930      1446
      flower     0.9468    0.9093    0.9277      2250

    accuracy                         0.9137      3696
   macro avg     0.9069    0.9149    0.9103      3696
weighted avg     0.9156    0.9137    0.9141      3696

[[1331  115]
 [ 204 2046]]
In [37]:
import pandas as pd

# Load the per-epoch metrics written by CSVLogger and plot
# loss (top panel) and accuracy (bottom panel), validation vs. training.
records     = pd.read_csv(modelname +'.csv')
plt.figure()
plt.subplot(211)
plt.plot(records['val_loss'])
plt.plot(records['loss'])
plt.yticks([0, 0.20, 0.30, 0.4, 0.5])
plt.title('Loss value',fontsize=12)

ax          = plt.gca()
ax.set_xticklabels([])      # hide x tick labels on the top panel (epoch axis shared with bottom)



plt.subplot(212)
plt.plot(records['val_acc'])    # 'val_acc'/'acc' are the column names this TF/Keras version logs
plt.plot(records['acc'])
plt.yticks([0.7, 0.8, 0.9, 1.0])
plt.title('Accuracy',fontsize=12)
plt.show()
In [38]:
# Collect the test-set indices where the prediction disagrees with the
# ground-truth label.
wrong_ans_index = [i for i in range(len(predout)) if predout[i] != testout[i]]
In [39]:
# De-duplicate the indices. NOTE(review): the loop above already yields
# unique indices, so this is effectively a no-op apart from discarding the
# ascending order (set iteration order is arbitrary).
wrong_ans_index = list(set(wrong_ans_index))
In [ ]:
# Show every misclassified test image together with its predicted and
# actual class index (0 = non-flower, 1 = flower per `labelname` above).

dataset = tsDatOrg  # presumably the original (display-ready) test images — confirm #flowers #fungus #rocks

for index in wrong_ans_index:
    print("Showing %s index image" %(index))
    print("Predicted as %s but is actually %s" %(predout[index], testout[index]))
    # Bug fix: was `plt.imshow(data[index])` — `data` is a stale name from an
    # earlier kernel session and breaks on Restart-&-Run-All; the cell's own
    # alias `dataset` is the intended source.
    plt.imshow(dataset[index])
    plt.show()
Showing 2560 index image
Predicted as 1 but is actually 0
Showing 3155 index image
Predicted as 1 but is actually 0
Showing 4 index image
Predicted as 0 but is actually 1
Showing 2053 index image
Predicted as 0 but is actually 1
Showing 3588 index image
Predicted as 1 but is actually 0
Showing 3079 index image
Predicted as 1 but is actually 0
Showing 2056 index image
Predicted as 0 but is actually 1
Showing 2057 index image
Predicted as 0 but is actually 1
Showing 3080 index image
Predicted as 1 but is actually 0
Showing 2062 index image
Predicted as 0 but is actually 1
Showing 15 index image
Predicted as 0 but is actually 1
Showing 17 index image
Predicted as 0 but is actually 1
Showing 2065 index image
Predicted as 0 but is actually 1
Showing 3603 index image
Predicted as 1 but is actually 0
Showing 3667 index image
Predicted as 1 but is actually 0
Showing 1557 index image
Predicted as 0 but is actually 1
Showing 2581 index image
Predicted as 1 but is actually 0
Showing 3094 index image
Predicted as 1 but is actually 0
Showing 536 index image
Predicted as 0 but is actually 1
Showing 3610 index image
Predicted as 1 but is actually 0
Showing 2075 index image
Predicted as 0 but is actually 1
Showing 2589 index image
Predicted as 1 but is actually 0
Showing 1566 index image
Predicted as 0 but is actually 1
Showing 33 index image
Predicted as 0 but is actually 1
Showing 3106 index image
Predicted as 1 but is actually 0
Showing 2083 index image
Predicted as 0 but is actually 1
Showing 1572 index image
Predicted as 0 but is actually 1
Showing 37 index image
Predicted as 0 but is actually 1
Showing 1061 index image
Predicted as 0 but is actually 1
Showing 1065 index image
Predicted as 0 but is actually 1
Showing 1578 index image
Predicted as 0 but is actually 1
Showing 3625 index image
Predicted as 1 but is actually 0
Showing 3629 index image
Predicted as 1 but is actually 0
Showing 564 index image
Predicted as 0 but is actually 1
Showing 1079 index image
Predicted as 0 but is actually 1
Showing 58 index image
Predicted as 0 but is actually 1
Showing 570 index image
Predicted as 0 but is actually 1
Showing 571 index image
Predicted as 0 but is actually 1
Showing 2623 index image
Predicted as 1 but is actually 0
Showing 3136 index image
Predicted as 1 but is actually 0
Showing 70 index image
Predicted as 0 but is actually 1
Showing 1606 index image
Predicted as 0 but is actually 1
Showing 2630 index image
Predicted as 1 but is actually 0
Showing 3656 index image
Predicted as 1 but is actually 0
Showing 3657 index image
Predicted as 1 but is actually 0
Showing 587 index image
Predicted as 0 but is actually 1
Showing 2638 index image
Predicted as 1 but is actually 0
Showing 2127 index image
Predicted as 0 but is actually 1
Showing 3665 index image
Predicted as 1 but is actually 0
Showing 1618 index image
Predicted as 0 but is actually 1
Showing 1619 index image
Predicted as 0 but is actually 1
Showing 2132 index image
Predicted as 0 but is actually 1
Showing 597 index image
Predicted as 0 but is actually 1
Showing 1109 index image
Predicted as 0 but is actually 1
Showing 2134 index image
Predicted as 0 but is actually 1
Showing 88 index image
Predicted as 0 but is actually 1
Showing 89 index image
Predicted as 0 but is actually 1
Showing 2135 index image
Predicted as 0 but is actually 1
Showing 1115 index image
Predicted as 0 but is actually 1
Showing 3070 index image
Predicted as 1 but is actually 0
Showing 3668 index image
Predicted as 1 but is actually 0
Showing 2144 index image
Predicted as 0 but is actually 1
Showing 2656 index image
Predicted as 1 but is actually 0
Showing 610 index image
Predicted as 0 but is actually 1
Showing 2146 index image
Predicted as 0 but is actually 1
Showing 1124 index image
Predicted as 0 but is actually 1
Showing 2150 index image
Predicted as 0 but is actually 1
Showing 3688 index image
Predicted as 1 but is actually 0
Showing 1136 index image
Predicted as 0 but is actually 1
Showing 629 index image
Predicted as 0 but is actually 1
Showing 631 index image
Predicted as 0 but is actually 1
Showing 633 index image
Predicted as 0 but is actually 1
Showing 636 index image
Predicted as 0 but is actually 1
Showing 2172 index image
Predicted as 0 but is actually 1
Showing 3197 index image
Predicted as 1 but is actually 0
Showing 2176 index image
Predicted as 0 but is actually 1
Showing 130 index image
Predicted as 0 but is actually 1
Showing 131 index image
Predicted as 0 but is actually 1
Showing 646 index image
Predicted as 0 but is actually 1
Showing 647 index image
Predicted as 0 but is actually 1
Showing 648 index image
Predicted as 0 but is actually 1
Showing 3207 index image
Predicted as 1 but is actually 0
Showing 138 index image
Predicted as 0 but is actually 1
Showing 1674 index image
Predicted as 0 but is actually 1
Showing 1164 index image
Predicted as 0 but is actually 1
Showing 2189 index image
Predicted as 0 but is actually 1
Showing 3208 index image
Predicted as 1 but is actually 0
Showing 2191 index image
Predicted as 0 but is actually 1
Showing 1168 index image
Predicted as 0 but is actually 1
Showing 146 index image
Predicted as 0 but is actually 1
Showing 662 index image
Predicted as 0 but is actually 1
Showing 2199 index image
Predicted as 0 but is actually 1
Showing 3224 index image
Predicted as 1 but is actually 0
Showing 2713 index image
Predicted as 1 but is actually 0
Showing 154 index image
Predicted as 0 but is actually 1
Showing 156 index image
Predicted as 0 but is actually 1
Showing 1183 index image
Predicted as 0 but is actually 1
Showing 1699 index image
Predicted as 0 but is actually 1
Showing 1189 index image
Predicted as 0 but is actually 1
Showing 2725 index image
Predicted as 1 but is actually 0
Showing 1704 index image
Predicted as 0 but is actually 1
Showing 2216 index image
Predicted as 0 but is actually 1
Showing 171 index image
Predicted as 0 but is actually 1
Showing 3244 index image
Predicted as 1 but is actually 0
Showing 1711 index image
Predicted as 0 but is actually 1
Showing 1715 index image
Predicted as 0 but is actually 1
Showing 3253 index image
Predicted as 1 but is actually 0
Showing 1718 index image
Predicted as 0 but is actually 1
Showing 184 index image
Predicted as 0 but is actually 1
Showing 2233 index image
Predicted as 0 but is actually 1
Showing 186 index image
Predicted as 0 but is actually 1
Showing 2749 index image
Predicted as 1 but is actually 0
Showing 2240 index image
Predicted as 0 but is actually 1
Showing 2752 index image
Predicted as 1 but is actually 0
Showing 706 index image
Predicted as 0 but is actually 1
Showing 1218 index image
Predicted as 0 but is actually 1
Showing 1731 index image
Predicted as 0 but is actually 1
Showing 1221 index image
Predicted as 0 but is actually 1
Showing 2246 index image
Predicted as 0 but is actually 1
Showing 2756 index image
Predicted as 1 but is actually 0
Showing 200 index image
Predicted as 0 but is actually 1
Showing 2249 index image
Predicted as 0 but is actually 1
Showing 2760 index image
Predicted as 1 but is actually 0
Showing 3271 index image
Predicted as 1 but is actually 0
Showing 2764 index image
Predicted as 1 but is actually 0
Showing 2256 index image
Predicted as 1 but is actually 0
Showing 1748 index image
Predicted as 0 but is actually 1
Showing 725 index image
Predicted as 0 but is actually 1
Showing 1240 index image
Predicted as 0 but is actually 1
Showing 2265 index image
Predicted as 1 but is actually 0
Showing 3289 index image
Predicted as 1 but is actually 0
Showing 1243 index image
Predicted as 0 but is actually 1
Showing 220 index image
Predicted as 0 but is actually 1
Showing 1756 index image
Predicted as 0 but is actually 1
Showing 734 index image
Predicted as 0 but is actually 1
Showing 2781 index image
Predicted as 1 but is actually 0
Showing 224 index image
Predicted as 0 but is actually 1
Showing 736 index image
Predicted as 0 but is actually 1
Showing 2273 index image
Predicted as 1 but is actually 0
Showing 2785 index image
Predicted as 1 but is actually 0
Showing 1253 index image
Predicted as 0 but is actually 1
Showing 2789 index image
Predicted as 1 but is actually 0
Showing 3302 index image
Predicted as 1 but is actually 0
Showing 745 index image
Predicted as 0 but is actually 1
Showing 235 index image
Predicted as 0 but is actually 1
Showing 748 index image
Predicted as 0 but is actually 1
Showing 2285 index image
Predicted as 1 but is actually 0
Showing 2998 index image
Predicted as 1 but is actually 0
Showing 3310 index image
Predicted as 1 but is actually 0
Showing 240 index image
Predicted as 0 but is actually 1
Showing 753 index image
Predicted as 0 but is actually 1
Showing 1264 index image
Predicted as 0 but is actually 1
Showing 1267 index image
Predicted as 0 but is actually 1
Showing 3316 index image
Predicted as 1 but is actually 0
Showing 246 index image
Predicted as 0 but is actually 1
Showing 2294 index image
Predicted as 1 but is actually 0
Showing 761 index image
Predicted as 0 but is actually 1
Showing 2297 index image
Predicted as 1 but is actually 0
Showing 2299 index image
Predicted as 1 but is actually 0
Showing 253 index image
Predicted as 0 but is actually 1
Showing 254 index image
Predicted as 0 but is actually 1
Showing 2302 index image
Predicted as 1 but is actually 0
Showing 1797 index image
Predicted as 0 but is actually 1
Showing 781 index image
Predicted as 0 but is actually 1
Showing 270 index image
Predicted as 0 but is actually 1
Showing 271 index image
Predicted as 0 but is actually 1
Showing 1808 index image
Predicted as 0 but is actually 1
Showing 1298 index image
Predicted as 0 but is actually 1
Showing 281 index image
Predicted as 0 but is actually 1
Showing 1824 index image
Predicted as 0 but is actually 1
Showing 2337 index image
Predicted as 1 but is actually 0
Showing 2340 index image
Predicted as 1 but is actually 0
Showing 2853 index image
Predicted as 1 but is actually 0
Showing 3367 index image
Predicted as 1 but is actually 0
Showing 1322 index image
Predicted as 0 but is actually 1
Showing 2858 index image
Predicted as 1 but is actually 0
Showing 1324 index image
Predicted as 0 but is actually 1
Showing 1325 index image
Predicted as 0 but is actually 1
Showing 1836 index image
Predicted as 0 but is actually 1
Showing 2861 index image
Predicted as 1 but is actually 0
Showing 3371 index image
Predicted as 1 but is actually 0
Showing 2355 index image
Predicted as 1 but is actually 0
Showing 3521 index image
Predicted as 1 but is actually 0
Showing 821 index image
Predicted as 0 but is actually 1
Showing 312 index image
Predicted as 0 but is actually 1
Showing 2873 index image
Predicted as 1 but is actually 0
Showing 3384 index image
Predicted as 1 but is actually 0
Showing 1340 index image
Predicted as 0 but is actually 1
Showing 317 index image
Predicted as 0 but is actually 1
Showing 1854 index image
Predicted as 0 but is actually 1
Showing 2879 index image
Predicted as 1 but is actually 0
Showing 3524 index image
Predicted as 1 but is actually 0
Showing 836 index image
Predicted as 0 but is actually 1
Showing 1351 index image
Predicted as 0 but is actually 1
Showing 1354 index image
Predicted as 0 but is actually 1
Showing 2891 index image
Predicted as 1 but is actually 0
Showing 1868 index image
Predicted as 0 but is actually 1
Showing 2384 index image
Predicted as 1 but is actually 0
Showing 3409 index image
Predicted as 1 but is actually 0
Showing 3412 index image
Predicted as 1 but is actually 0
Showing 2390 index image
Predicted as 1 but is actually 0
Showing 2903 index image
Predicted as 1 but is actually 0
Showing 3415 index image
Predicted as 1 but is actually 0
Showing 2905 index image
Predicted as 1 but is actually 0
In [ ]:
# Stacking 3 NNs?